# importing useful libraries
import numpy as np
import matplotlib.pyplot as plt
# Generating data points
# Component means: one central cluster plus four in the corners.
mu1 = np.array([0, 0])
mu2 = np.array([3, -3])
mu3 = np.array([3, 3])
mu4 = np.array([-3, 3])
mu5 = np.array([-3, -3])
# Component covariances: identity for the centre, correlated for the corners.
cov1 = np.array([[1, 0], [0, 1]])
cov2 = np.array([[1, .5], [.5, 1]])
cov3 = np.array([[1, -.5], [-.5, 1]])
cov4 = np.array([[1, .5], [.5, 1]])
cov5 = np.array([[1, -.5], [-.5, 1]])
# Draw N = 600 points: 100 from each of the first four components and
# 200 from the last one, filled slice by slice into a preallocated array.
N = 600
data = np.zeros((N, 2))
rng = np.random.default_rng(seed=1)
start = 0
for mu, cov, count in ((mu1, cov1, 100), (mu2, cov2, 100),
                       (mu3, cov3, 100), (mu4, cov4, 100),
                       (mu5, cov5, 200)):
    data[start:start + count, :] = rng.multivariate_normal(mu, cov, size=count)
    start += count
# Training a Variational Gaussian Mixture model
# Fit a variational Bayesian Gaussian mixture: 10 components, progress plots
# every 20 iterations, up to 100 iterations.
import src.vgm as vgm

config = dict(K=10, display=True, max_iter=100, plot_period=20)
model = vgm.VariationalGaussianMixture(**config)
model.fit(data)
# Output: <src.vgm.VariationalGaussianMixture at 0x10a6f5100>
# Evolution of the likelihood lower bound
# Plot the evolution of the variational lower bound L(Q) over iterations.
plt.figure(figsize=(8, 6))
# Use the actual trace length rather than the hard-coded 100: fit may record
# a different number of entries (e.g. early convergence), and a length
# mismatch would make plt.plot raise a ValueError.
plt.plot(np.arange(len(model.lower_bound)), model.lower_bound)
plt.xlabel('Iterations')
plt.ylabel('Likelihood lower bound')
# Raw string so the LaTeX command \mathcal is not parsed as a string escape.
plt.title(r'Evolution of $\mathcal{L}(Q)$ through iterations')
# Output: Text(0.5, 1.0, 'Evolution of $\\mathcal{L}(Q)$ through iterations')
# Generating data points
# Three elongated components stacked along the y-axis at y = -2, 0, 2.
mu1 = np.array([0, -2])
mu2 = np.array([0, 0])
mu3 = np.array([0, 2])
# All components share the same axis-aligned covariance: wide in x, thin in y.
cov1 = np.array([[2, 0], [0, .2]])
cov2 = np.array([[2, 0], [0, .2]])
cov3 = np.array([[2, 0], [0, .2]])
# Sample 900 points, 300 per component, stacked in component order.
N = 900
rng = np.random.default_rng(seed=1)
samples = [rng.multivariate_normal(m, c, size=300)
           for m, c in ((mu1, cov1), (mu2, cov2), (mu3, cov3))]
data = np.vstack(samples)
# Training a Variational Gaussian Mixture model
# Refit the variational mixture on the banded dataset; allow a longer
# iteration budget (250) since the components overlap heavily in x.
model = vgm.VariationalGaussianMixture(
    K=10,
    display=True,
    max_iter=250,
    plot_period=20,
)
model.fit(data)
# Output: <src.vgm.VariationalGaussianMixture at 0x162609250>
# Generating data points
# Three overlapping components, all centred at the origin.
mu1 = np.array([0, 0])
mu2 = np.array([0, 0])
mu3 = np.array([0, 0])
# Covariances with different orientations and eccentricities.
cov1 = np.array([[1, 0], [0, .2]])
cov2 = np.array([[.02, -.08], [-.08, 1.5]])
# NOTE(review): cov3 is not symmetric (0.4 vs 0.05 off-diagonal), so it is
# not a valid covariance matrix; numpy warns and the draw is ill-defined.
# Confirm whether a symmetric PSD matrix was intended here.
cov3 = np.array([[0.5, 0.4], [0.05, 0.05]])
# Draw 400 points split 125 / 125 / 150 across the three components.
N = 400
rng = np.random.default_rng(seed=1)
counts = (125, 125, 150)
params = ((mu1, cov1), (mu2, cov2), (mu3, cov3))
chunks = [rng.multivariate_normal(m, c, size=k)
          for (m, c), k in zip(params, counts)]
data = np.vstack(chunks)
# Training a Variational Gaussian Mixture model
# Train on the overlapping-cluster data; the optimiser gets the largest
# budget (300 iterations) since the components are hardest to separate.
n_components, budget = 10, 300
model = vgm.VariationalGaussianMixture(
    K=n_components, display=True, max_iter=budget, plot_period=20)
model.fit(data)
# Output: <src.vgm.VariationalGaussianMixture at 0x16317f280>